function [p] = polyfit(x,y,n)
//!% POLYFIT Fit polynomial to data.
//!%   POLYFIT(X,Y,N) finds the coefficients of a polynomial P(X) of
//!%   degree N that fits the data, P(X(I)) ~= Y(I), in a least-squares sense.
//!%
//!%   [P] = POLYFIT(X,Y,N) returns the polynomial coefficients P.
//!%
//!%   See also POLYVAL.
//!%
//!% The regression problem is formulated in matrix format as:
//!%
//!%     y = V*p    or
//!%
//!%           3    2
//!%     y = [x    x    x   1] [p3
//!%                            p2
//!%                            p1
//!%                            p0]
//!%
//!% where the vector p contains the coefficients to be found. For a
//!% 7th order polynomial, matrix V would be:
//!%
//!%     V = [x.^7 x.^6 x.^5 x.^4 x.^3 x.^2 x ones(size(x))];

if size(x,1) ~= size(y,1) | size(x,2) ~= size(y,2)
    error('X and Y vectors must be the same size.')
end
x = x(:);
y = y(:);

//% Construct the Vandermonde matrix column by column: the last column
//% is all ones (x^0), and each column to its left is x times the one
//% to its right, so column j holds x.^(n+1-j).
V(1:length(x),n+1) = ones(length(x),1);
for j = n:-1:1
    V(:,j) = x.*V(:,j+1);
end

//!% Solve the least-squares problem; R is the Cholesky factor of V'*V
//!% and is reused for the conditioning checks below.
[Q,R] = qr(V,0);
p = R\(Q'*y);    //!% Same as p = V\y;

if size(R,2) > size(R,1)
    warning('Polynomial is not unique; degree >= number of data points.')
elseif size(R,1) ~= size(R,2)
    // warning('Polynomial approximation; degree < number of data points.')
elseif 1/rcond(R) > 1.0e10
    warning(sprintf( ...
        ['Polynomial is badly conditioned. Remove repeated data points\n' ...
         ' or try centering and scaling as described in HELP POLYFIT.']))
end

r = y - V*p;    //% Residuals of the fit (computed but not returned).
p = p.';        //!% Polynomial coefficients are row vectors by convention.
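
// ------------------------------------------------------------------
// Usage sketch (illustrative only, assuming this function has been
// loaded into the interpreter): fit a degree-2 polynomial to exact
// samples of 3*x^2 - 2*x + 1. Since the data are noiseless, the
// returned row vector p should be close to [3 -2 1], i.e. coefficients
// in descending powers with the constant term last, as described in
// the help block above.
//
//     x = (0:0.5:5)';                    // sample points
//     y = 3*x.^2 - 2*x + 1;              // exact quadratic values
//     p = polyfit(x, y, 2)               // expect p ~ [3 -2 1]
//     yhat = p(1)*x.^2 + p(2)*x + p(3);  // evaluate the fitted polynomial
//     disp(max(abs(y - yhat)))           // maximum fit error, ~0
// ------------------------------------------------------------------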